name = "cargo-doc"
test = false
+[[bin]]
+name = "cargo-generate-lockfile"
+test = false
+
[[test]]
name = "tests"
--- /dev/null
+#![crate_name="cargo-generate-lockfile"]
+#![feature(phase)]
+
+extern crate cargo;
+
+#[phase(plugin, link)]
+extern crate hammer;
+
+#[phase(plugin, link)]
+extern crate log;
+
+extern crate serialize;
+
+use std::os;
+use cargo::ops;
+use cargo::{execute_main_without_stdin};
+use cargo::core::MultiShell;
+use cargo::util::{CliResult, CliError};
+use cargo::util::important_paths::find_project_manifest;
+
+#[deriving(PartialEq,Clone,Decodable,Encodable)]
+pub struct Options {
+ manifest_path: Option<String>
+}
+
+hammer_config!(Options)
+
+fn main() {
+ execute_main_without_stdin(execute);
+}
+
+fn execute(options: Options, shell: &mut MultiShell) -> CliResult<Option<()>> {
+ debug!("executing; cmd=cargo-generate-lockfile; args={}", os::args());
+
+ let root = match options.manifest_path {
+ Some(path) => Path::new(path),
+ None => try!(find_project_manifest(&os::getcwd(), "Cargo.toml")
+ .map_err(|_| {
+ CliError::new("Could not find Cargo.toml in this \
+ directory or any parent directory",
+ 102)
+ }))
+ };
+
+ ops::generate_lockfile(&root, shell, true)
+ .map(|_| None).map_err(|err| CliError::from_boxed(err, 101))
+}
})
.map_err(|e| CliError::from_boxed(e, 1)));
- let source_id = SourceId::for_git(&url, reference.as_slice());
+ let source_id = SourceId::for_git(&url, reference.as_slice(), None);
let mut config = try!(Config::new(shell, true, None, None).map_err(|e| {
CliError::from_boxed(e, 1)
#[deriving(PartialEq,Clone,Show)]
pub struct Dependency {
name: String,
- namespace: SourceId,
+ source_id: SourceId,
req: VersionReq,
transitive: bool
}
impl Dependency {
pub fn parse(name: &str, version: Option<&str>,
- namespace: &SourceId) -> CargoResult<Dependency> {
+ source_id: &SourceId) -> CargoResult<Dependency> {
let version = match version {
Some(v) => try!(VersionReq::parse(v)),
None => VersionReq::any()
Ok(Dependency {
name: name.to_string(),
- namespace: namespace.clone(),
+ source_id: source_id.clone(),
req: version,
transitive: true
})
self.name.as_slice()
}
- pub fn get_namespace(&self) -> &SourceId {
- &self.namespace
+ pub fn get_source_id(&self) -> &SourceId {
+ &self.source_id
}
pub fn as_dev(&self) -> Dependency {
}
pub fn matches(&self, sum: &Summary) -> bool {
- debug!("self={}; summary={}", self, sum);
- debug!(" a={}; b={}", self.namespace, sum.get_source_id());
+ debug!("matches; self={}; summary={}", self, sum);
+ debug!(" a={}; b={}", self.source_id, sum.get_source_id());
self.name.as_slice() == sum.get_name() &&
self.req.matches(sum.get_version()) &&
- &self.namespace == sum.get_source_id()
+ &self.source_id == sum.get_source_id()
}
}
}
}
-#[deriving(Clone, PartialEq)]
+#[deriving(Clone, PartialEq, PartialOrd, Ord)]
pub struct PackageId {
name: String,
version: semver::Version,
source_id: SourceId,
}
+impl<E, S: Encoder<E>> Encodable<S, E> for PackageId {
+ fn encode(&self, s: &mut S) -> Result<(), E> {
+ let source = self.source_id.to_url();
+ let encoded = format!("{} {} ({})", self.name, self.version, source);
+ encoded.encode(s)
+ }
+}
+
+impl<E, D: Decoder<E>> Decodable<D, E> for PackageId {
+ fn decode(d: &mut D) -> Result<PackageId, E> {
+ let string: String = raw_try!(Decodable::decode(d));
+ let regex = regex!(r"^([^ ]+) ([^ ]+) \(([^\)]+)\)$");
+ let captures = regex.captures(string.as_slice()).expect("invalid serialized PackageId");
+
+ let name = captures.at(1);
+ let version = semver::parse(captures.at(2)).expect("invalid version");
+ let source_id = SourceId::from_url(captures.at(3).to_string());
+
+ Ok(PackageId {
+ name: name.to_string(),
+ version: version,
+ source_id: source_id
+ })
+ }
+}
+
impl<S: hash::Writer> Hash<S> for PackageId {
fn hash(&self, state: &mut S) {
self.name.hash(state);
}
}
-impl<D: Decoder<Box<CargoError + Send>>>
- Decodable<D,Box<CargoError + Send>>
- for PackageId
-{
- fn decode(d: &mut D) -> CargoResult<PackageId> {
- let (name, version, source_id): (String, String, SourceId) = try!(Decodable::decode(d));
-
- PackageId::new(name.as_slice(), version.as_slice(), &source_id)
- }
-}
-
-impl<E, S: Encoder<E>> Encodable<S,E> for PackageId {
- fn encode(&self, e: &mut S) -> Result<(), E> {
- (self.name.clone(), self.version.to_string(), self.source_id.clone()).encode(e)
- }
-}
+//impl<D: Decoder<Box<CargoError + Send>>>
+ //Decodable<D,Box<CargoError + Send>>
+ //for PackageId
+//{
+ //fn decode(d: &mut D) -> CargoResult<PackageId> {
+ //let (name, version, source_id): (String, String, SourceId) = try!(Decodable::decode(d));
+
+ //PackageId::new(name.as_slice(), version.as_slice(), &source_id)
+ //}
+//}
+
+//impl<E, S: Encoder<E>> Encodable<S,E> for PackageId {
+ //fn encode(&self, e: &mut S) -> Result<(), E> {
+ //(self.name.clone(), self.version.to_string(), self.source_id.clone()).encode(e)
+ //}
+//}
#[cfg(test)]
mod tests {
impl Registry for Vec<Summary> {
fn query(&mut self, dep: &Dependency) -> CargoResult<Vec<Summary>> {
+ debug!("querying, summaries={}",
+ self.iter().map(|s| s.get_package_id().to_string()).collect::<Vec<String>>());
+
Ok(self.iter()
.filter(|summary| dep.matches(*summary))
.map(|summary| summary.clone())
Ok(())
}
- fn load(&mut self, namespace: &SourceId, override: bool) -> CargoResult<()> {
+ fn ensure_loaded(&mut self, source_id: &SourceId) -> CargoResult<()> {
+ if self.searched.contains(source_id) { return Ok(()); }
+ try!(self.load(source_id, false));
+ Ok(())
+ }
+
+ fn load(&mut self, source_id: &SourceId, override: bool) -> CargoResult<()> {
(|| {
- let mut source = namespace.load(self.config);
+ let mut source = source_id.load(self.config);
let dst = if override {&mut self.overrides} else {&mut self.summaries};
// Ensure the source has fetched all necessary remote data.
// Save off the source
self.sources.insert(namespace, source);
+ // Track that the source has been searched
+ self.searched.push(source_id.clone());
+
Ok(())
- }).chain_error(|| human(format!("Unable to update {}", namespace)))
+ }).chain_error(|| human(format!("Unable to update {}", source_id)))
}
fn query_overrides(&self, dep: &Dependency) -> Vec<Summary> {
let overrides = self.query_overrides(dep);
if overrides.is_empty() {
- // Ensure the requested namespace is loaded
- try!(self.ensure_loaded(dep.get_namespace()));
+ // Ensure the requested source_id is loaded
+ try!(self.ensure_loaded(dep.get_source_id()));
self.summaries.query(dep)
} else {
Ok(overrides)
}
}
}
+
+#[cfg(test)]
+pub mod test {
+ use core::{Summary, Registry, Dependency};
+ use util::{CargoResult};
+
+ pub struct RegistryBuilder {
+ summaries: Vec<Summary>,
+ overrides: Vec<Summary>
+ }
+
+ impl RegistryBuilder {
+ pub fn new() -> RegistryBuilder {
+ RegistryBuilder { summaries: vec!(), overrides: vec!() }
+ }
+
+ pub fn summary(mut self, summary: Summary) -> RegistryBuilder {
+ self.summaries.push(summary);
+ self
+ }
+
+ pub fn summaries(mut self, summaries: Vec<Summary>) -> RegistryBuilder {
+ self.summaries.push_all_move(summaries);
+ self
+ }
+
+ pub fn override(mut self, summary: Summary) -> RegistryBuilder {
+ self.overrides.push(summary);
+ self
+ }
+
+ pub fn overrides(mut self, summaries: Vec<Summary>) -> RegistryBuilder {
+ self.overrides.push_all_move(summaries);
+ self
+ }
+
+ fn query_overrides(&self, dep: &Dependency) -> Vec<Summary> {
+ self.overrides.iter()
+ .filter(|s| s.get_name() == dep.get_name())
+ .map(|s| s.clone())
+ .collect()
+ }
+ }
+
+ impl Registry for RegistryBuilder {
+ fn query(&mut self, dep: &Dependency) -> CargoResult<Vec<Summary>> {
+ debug!("querying; dep={}", dep);
+
+ let overrides = self.query_overrides(dep);
+
+ if overrides.is_empty() {
+ self.summaries.query(dep)
+ } else {
+ Ok(overrides)
+ }
+ }
+ }
+}
use std::collections::HashMap;
use std::fmt;
+use serialize::{Encodable, Encoder};
use util::graph::{Nodes,Edges};
use core::{
use util::{CargoResult, Graph, human, internal};
pub struct Resolve {
- graph: Graph<PackageId>
+ graph: Graph<PackageId>,
+ root: PackageId
+}
+
+#[deriving(Encodable, Decodable, Show)]
+pub struct EncodableResolve {
+ package: Vec<EncodableDependency>
+}
+
+#[deriving(Encodable, Decodable, Show)]
+pub struct EncodableDependency{
+ name: String,
+ version: String,
+ source: SourceId,
+ dependencies: Option<Vec<PackageId>>
+}
+
+impl<E, S: Encoder<E>> Encodable<S, E> for Resolve {
+ fn encode(&self, s: &mut S) -> Result<(), E> {
+ let mut ids: Vec<&PackageId> = self.graph.iter().collect();
+ ids.sort();
+
+ let encodable = ids.iter().filter_map(|&id| {
+ if self.root == *id { return None; }
+
+ let deps = self.graph.edges(id).map(|edge| {
+ let mut deps = edge.map(|e| e.clone()).collect::<Vec<PackageId>>();
+ deps.sort();
+ deps
+ });
+
+ Some(EncodableDependency {
+ name: id.get_name().to_string(),
+ version: id.get_version().to_string(),
+ source: id.get_source_id().clone(),
+ dependencies: deps
+ })
+ }).collect::<Vec<EncodableDependency>>();
+
+ EncodableResolve { package: encodable }.encode(s)
+ }
}
impl Resolve {
- fn new() -> Resolve {
- Resolve { graph: Graph::new() }
+ fn new(root: PackageId) -> Resolve {
+ Resolve { graph: Graph::new(), root: root }
}
pub fn iter(&self) -> Nodes<PackageId> {
}
impl<'a, R: Registry> Context<'a, R> {
- fn new(registry: &'a mut R) -> Context<'a, R> {
+ fn new(registry: &'a mut R, root: PackageId) -> Context<'a, R> {
Context {
registry: registry,
- resolve: Resolve::new(),
+ resolve: Resolve::new(root),
seen: HashMap::new()
}
}
-> CargoResult<Resolve> {
log!(5, "resolve; deps={}", deps);
- let mut context = Context::new(registry);
+ let mut context = Context::new(registry, root.clone());
try!(resolve_deps(root, deps, &mut context));
log!(5, " result={}", context.resolve);
Ok(context.resolve)
use std::fmt::{Show, Formatter};
use std::hash;
use std::c_str::CString;
+use std::cmp::Ordering;
use serialize::{Decodable, Decoder, Encodable, Encoder};
+use url;
use url::Url;
use core::{Summary, Package, PackageId};
}
}
-#[deriving(Encodable, Decodable, Clone, Eq)]
+#[deriving(Clone, Eq)]
pub struct SourceId {
- pub kind: SourceKind,
pub location: Location,
+ pub kind: SourceKind,
+ // e.g. the exact git revision of the specified branch for a Git Source
+ pub precise: Option<String>
}
impl Show for Location {
}
}
+impl PartialOrd for SourceId {
+ fn partial_cmp(&self, other: &SourceId) -> Option<Ordering> {
+ self.to_string().partial_cmp(&other.to_string())
+ }
+}
+
+impl Ord for SourceId {
+ fn cmp(&self, other: &SourceId) -> Ordering {
+ self.to_string().cmp(&other.to_string())
+ }
+}
+
impl Location {
pub fn parse(s: &str) -> CargoResult<Location> {
if s.starts_with("file:") {
unsafe fn to_c_str_unchecked(&self) -> CString { self.to_c_str() }
}
+impl<E, S: Encoder<E>> Encodable<S, E> for SourceId {
+ fn encode(&self, s: &mut S) -> Result<(), E> {
+ if self.is_path() {
+ s.emit_option_none()
+ } else {
+ self.to_url().encode(s)
+ }
+ }
+}
+
+impl<E, D: Decoder<E>> Decodable<D, E> for SourceId {
+ fn decode(d: &mut D) -> Result<SourceId, E> {
+ let string: String = Decodable::decode(d).ok().expect("Invalid encoded SourceId");
+ Ok(SourceId::from_url(string))
+ }
+}
+
impl Show for SourceId {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
match *self {
- SourceId { kind: PathKind, ref location } => {
+ SourceId { kind: PathKind, ref location, .. } => {
try!(write!(f, "{}", location))
},
- SourceId { kind: GitKind(ref reference), ref location } => {
+ SourceId { kind: GitKind(ref reference), ref location, ref precise, .. } => {
try!(write!(f, "{}", location));
if reference.as_slice() != "master" {
- try!(write!(f, "#ref={}", reference));
+ try!(write!(f, "?ref={}", reference));
+ }
+
+ if precise.is_some() {
+ try!(write!(f, "#{}", precise.get_ref()));
}
},
SourceId { kind: RegistryKind, .. } => {
match *self {
SourceId {
kind: ref kind @ GitKind(..),
- location: Remote(ref url)
+ location: Remote(ref url),
+ precise: None
} => {
kind.hash(into);
git::canonicalize_url(url.to_string().as_slice()).hash(into);
impl SourceId {
pub fn new(kind: SourceKind, location: Location) -> SourceId {
- SourceId { kind: kind, location: location }
+ SourceId { kind: kind, location: location, precise: None }
+ }
+
+ pub fn from_url(string: String) -> SourceId {
+ let mut parts = string.as_slice().splitn('+', 1);
+ let kind = parts.nth(0).unwrap();
+ let mut url = Url::parse(parts.nth(0).unwrap()).ok().expect("Invalid URL");
+
+ match kind {
+ "git" => {
+ let reference = {
+ url.path.query.iter()
+ .find(|&&(ref k, ref v)| k.as_slice() == "ref")
+ .map(|&(ref k, ref v)| v.to_string())
+ .unwrap_or("master".to_string())
+ .to_string()
+ };
+
+ url.path.query = url.path.query.iter()
+ .filter(|&&(ref k,_)| k.as_slice() != "ref")
+ .map(|q| q.clone())
+ .collect();
+
+ let precise = url.path.fragment.clone();
+ url.path.fragment = None;
+
+ SourceId::for_git(&url, reference.as_slice(), precise)
+ },
+ _ => fail!("Unsupported serialized SourceId")
+ }
+ }
+
+ pub fn to_url(&self) -> String {
+ match *self {
+ SourceId { kind: PathKind, ref location, .. } => {
+ fail!("Path sources are not included in the lockfile, so this is unimplemented");
+ },
+ SourceId { kind: GitKind(ref reference), ref location, ref precise, .. } => {
+ let ref_str = if reference.as_slice() != "master" {
+ format!("?ref={}", reference)
+ } else {
+ "".to_string()
+ };
+
+ let precise_str = if precise.is_some() {
+ format!("#{}", precise.get_ref())
+ } else {
+ "".to_string()
+ };
+
+ format!("git+{}{}{}", location, ref_str, precise_str)
+ },
+ SourceId { kind: RegistryKind, .. } => {
+ // TODO: Central registry vs. alternates
+ "registry+https://crates.io/".to_string()
+ }
+ }
}
// Pass absolute path
SourceId::new(PathKind, Local(path.clone()))
}
- pub fn for_git(url: &Url, reference: &str) -> SourceId {
- SourceId::new(GitKind(reference.to_string()), Remote(url.clone()))
+ pub fn for_git(url: &Url, reference: &str, precise: Option<String>) -> SourceId {
+ let mut id = SourceId::new(GitKind(reference.to_string()), Remote(url.clone()));
+ if precise.is_some() {
+ id = id.with_precise(precise.unwrap());
+ }
+
+ id
}
pub fn for_central() -> SourceId {
RegistryKind => unimplemented!()
}
}
+
+ pub fn with_precise(&self, v: String) -> SourceId {
+ SourceId {
+ precise: Some(v),
+ .. self.clone()
+ }
+ }
}
pub struct SourceMap {
#![feature(default_type_params)]
#![deny(warnings)]
+extern crate collections;
extern crate debug;
+extern crate regex;
+extern crate semver;
+extern crate serialize;
extern crate term;
-extern crate collections;
extern crate url;
-extern crate serialize;
-extern crate semver;
+#[phase(plugin)] extern crate regex_macros;
#[phase(plugin, link)] extern crate log;
-extern crate toml;
extern crate docopt;
+extern crate toml;
#[cfg(test)] extern crate hamcrest;
use std::os;
--- /dev/null
+use std::collections::TreeMap;
+use std::io::fs::File;
+use serialize::{Encodable, Decodable};
+use toml;
+use toml::{Encoder, Decoder};
+use core::registry::PackageRegistry;
+use core::{MultiShell, Source, Resolve, resolver};
+use sources::{PathSource};
+use util::config::{Config};
+use util::{CargoResult};
+
+pub fn generate_lockfile(manifest_path: &Path,
+ shell: &mut MultiShell,
+ update: bool)
+ -> CargoResult<()> {
+
+ log!(4, "generate_lockfile; manifest-path={}", manifest_path.display());
+
+ let mut source = PathSource::for_path(&manifest_path.dir_path());
+ try!(source.update());
+
+ // TODO: Move this into PathSource
+ let package = try!(source.get_root_package());
+ debug!("loaded package; package={}", package);
+
+ for key in package.get_manifest().get_unused_keys().iter() {
+ try!(shell.warn(format!("unused manifest key: {}", key)));
+ }
+
+ let source_ids = package.get_source_ids();
+
+ let resolve = {
+ let mut config = try!(Config::new(shell, update, None, None));
+
+ let mut registry =
+ try!(PackageRegistry::new(source_ids, vec![], &mut config));
+
+ try!(resolver::resolve(package.get_package_id(),
+ package.get_dependencies(),
+ &mut registry))
+ };
+
+ write_resolve(resolve);
+ Ok(())
+}
+
+fn write_resolve(resolve: Resolve) {
+ let mut e = Encoder::new();
+ resolve.encode(&mut e).unwrap();
+
+ let deps = e.toml.find(&"package".to_string()).unwrap().as_slice().unwrap();
+ let mut out = String::new();
+
+ for dep in deps.iter() {
+ let dep = dep.as_table().unwrap();
+ out.push_str("[[package]]\n");
+ out.push_str(format!("name = {}\n", lookup(dep, "name")).as_slice());
+ out.push_str(format!("version = {}\n", lookup(dep, "version")).as_slice());
+
+ dep.find(&"source".to_string()).map(|s| {
+ out.push_str(format!("source = {}\n", lookup(dep, "source")).as_slice());
+ });
+
+ dep.find(&"dependencies".to_string()).map(|s| {
+ let slice = s.as_slice().unwrap();
+
+ if !slice.is_empty() {
+ out.push_str("dependencies = [\n");
+
+ for child in s.as_slice().unwrap().iter() {
+ out.push_str(format!(" {},\n", child).as_slice());
+ }
+
+ out.push_str("]\n");
+ }
+ out.push_str("\n");
+ });
+ }
+
+ let mut file = File::create(&Path::new("Cargo.lock"));
+ write!(file, "{}", out);
+
+ let mut d = Decoder::new(toml::Table(e.toml.clone()));
+ let v: resolver::EncodableResolve = Decodable::decode(&mut d).unwrap();
+
+ println!("{}", v);
+}
+
+fn lookup<'a>(table: &'a TreeMap<String, toml::Value>, key: &'static str) -> &'a toml::Value {
+ table.find(&key.to_string()).unwrap()
+}
pub use self::cargo_run::run;
pub use self::cargo_new::{new, NewOptions};
pub use self::cargo_doc::{doc, DocOptions};
+pub use self::cargo_generate_lockfile::generate_lockfile;
mod cargo_clean;
mod cargo_compile;
mod cargo_run;
mod cargo_new;
mod cargo_doc;
+mod cargo_generate_lockfile;
reference: GitReference,
db_path: Path,
checkout_path: Path,
- path_source: PathSource,
+ source_id: SourceId,
+ path_source: Option<PathSource>,
config: &'a mut Config<'b>
}
let checkout_path = config.git_checkout_path()
.join(ident.as_slice()).join(reference.as_slice());
- let path_source = PathSource::new(&checkout_path, source_id);
-
GitSource {
remote: remote,
reference: GitReference::for_str(reference.as_slice()),
db_path: db_path,
checkout_path: checkout_path,
- path_source: path_source,
+ source_id: source_id.clone(),
+ path_source: None,
config: config
}
}
- pub fn get_namespace(&self) -> &Location {
+ pub fn get_location(&self) -> &Location {
self.remote.get_location()
}
}
self.remote.db_at(&self.db_path)
};
- try!(repo.copy_to(self.reference.as_slice(), &self.checkout_path));
+ let checkout = try!(repo.copy_to(self.reference.as_slice(), &self.checkout_path));
+
+ let source_id = self.source_id.with_precise(checkout.get_rev().to_string());
+ let path_source = PathSource::new(&self.checkout_path, &source_id);
- self.path_source.update()
+ self.path_source = Some(path_source);
+ self.path_source.as_mut().unwrap().update()
}
fn list(&self) -> CargoResult<Vec<Summary>> {
- self.path_source.list()
+ self.path_source.as_ref().expect("BUG: update() must be called before list()").list()
}
fn download(&self, _: &[PackageId]) -> CargoResult<()> {
fn get(&self, ids: &[PackageId]) -> CargoResult<Vec<Package>> {
log!(5, "getting packages for package ids `{}` from `{}`", ids, self.remote);
- self.path_source.get(ids)
+ self.path_source.as_ref().expect("BUG: update() must be called before get()").get(ids)
}
fn fingerprint(&self, _pkg: &Package) -> CargoResult<String> {
Ok(checkout)
}
- fn get_source<'a>(&'a self) -> &'a Path {
+ fn get_source(&self) -> &Path {
self.database.get_path()
}
+ pub fn get_rev(&self) -> &str {
+ self.revision.as_slice()
+ }
+
fn clone_repo(&self) -> CargoResult<()> {
let dirname = Path::new(self.location.dirname());
use std::hash::Hash;
use std::collections::{HashMap, HashSet};
use std::collections::hashmap::{Keys, SetItems};
+use serialize::Decodable;
pub struct Graph<N> {
nodes: HashMap<N, HashSet<N>>
mod test_cargo_compile_path_deps;
mod test_cargo_test;
mod test_shell;
-mod test_cargo_cross_compile;
+// mod test_cargo_cross_compile;
mod test_cargo_run;
mod test_cargo_version;
mod test_cargo_new;